Matrix


In [12]:
import numpy as np

# np.matrix is deprecated in NumPy (may be removed); use a regular 2-D ndarray.
a = np.array([[1, 2], [3, 4]])
b = 2 * np.eye(2)  # 2·I, so adding it bumps each diagonal entry by 2

In [14]:
# Element-wise matrix sum; the 2·I term adds 2 to each diagonal entry.
a+b


Out[14]:
matrix([[ 3.,  2.],
        [ 3.,  6.]])

Addition

  • Transform the matrix into a vector, then train an ANN with supervised learning

In [23]:
from keras.models import Sequential
from keras.layers import Dense

import numpy as np
import math
%matplotlib inline
import matplotlib.pyplot as plt

model = Sequential()
model.add(Dense(8, input_dim=8, init='uniform',activation='linear'))
#model.add(Dense(4, activation='linear'))
model.add(Dense(4, activation='linear'))
model.compile(loss='mean_squared_error',optimizer='rmsprop',metrics=['accuracy'])

x_train = 100*(2*np.random.random((5000,8))-1)
y_train = x_train[:,0:4]+x_train[:,4:8]

hist = model.fit(x_train,y_train, nb_epoch=100, batch_size=30,verbose=0,shuffle=True, validation_split=0.2)

In [24]:
x_val = 100*(2*np.random.random((100,8))-1)
y_val = x_val[:,0:4]+x_val[:,4:8]

out = model.predict(x_val)

errors= np.mean(np.abs(y_val-out),axis=1)

#plt.scatter(range(len(errors)),errors,c='r',label='Accurate')
#plt.legend(loc=1)
plt.hist(errors)
plt.show()


Subtraction


In [25]:
from keras.models import Sequential
from keras.layers import Dense

import numpy as np
import math
%matplotlib inline
import matplotlib.pyplot as plt

model = Sequential()
model.add(Dense(8, input_dim=8, init='uniform',activation='linear'))
#model.add(Dense(4, activation='linear'))
model.add(Dense(4, activation='linear'))
model.compile(loss='mean_squared_error',optimizer='rmsprop',metrics=['accuracy'])

x_train = 100*(2*np.random.random((5000,8))-1)
y_train = x_train[:,0:4]-x_train[:,4:8]

hist = model.fit(x_train,y_train, nb_epoch=100, batch_size=30,verbose=0,shuffle=True, validation_split=0.2)

In [26]:
x_val = 100*(2*np.random.random((100,8))-1)
y_val = x_val[:,0:4]-x_val[:,4:8]

out = model.predict(x_val)

errors= np.mean(np.abs(y_val-out),axis=1)

#plt.scatter(range(len(errors)),errors,c='r',label='Accurate')
#plt.legend(loc=1)
plt.hist(errors)
plt.show()


Multiplication


In [55]:
from keras.models import Sequential
from keras.layers import Dense

import numpy as np
import math
%matplotlib inline
import matplotlib.pyplot as plt

model = Sequential()
model.add(Dense(16, input_dim=8, init='uniform',activation='linear'))
model.add(Dense(8, activation='linear'))
model.add(Dense(4, activation='linear'))
model.compile(loss='mean_squared_error',optimizer='rmsprop',metrics=['accuracy'])

x_train = 100*(2*np.random.random((5000,8))-1)
#y_train = x_train[:,0:4].dot(x_train[:,4:8])
y_train = np.array([x_train[i,0:4].reshape(2,2).dot(x_train[i,4:8].reshape(2,2)).reshape(1,4)[0] for i in range(len(x_train))])

hist = model.fit(x_train,y_train, nb_epoch=100, batch_size=30,verbose=0,shuffle=True, validation_split=0.2)

In [56]:
x_val = 100*(2*np.random.random((100,8))-1)
y_val = np.array([x_val[i,0:4].reshape(2,2).dot(x_val[i,4:8].reshape(2,2)).reshape(1,4)[0] for i in range(len(x_val))])

out = model.predict(x_val)

errors= np.mean(np.abs(y_val-out),axis=1)

#plt.scatter(range(len(errors)),errors,c='r',label='Accurate')
#plt.legend(loc=1)
plt.hist(errors)
plt.show()


Eigenvalues


In [6]:
import numpy as np
a = np.random.random(size=[3,3])
np.linalg.eigvals(a)
print a
np.reshape(a,(1,9))


[[ 0.33419301  0.09317074  0.04877586]
 [ 0.27404251  0.51549351  0.18365658]
 [ 0.99676429  0.3342796   0.90095057]]
Out[6]:
array([[ 0.33419301,  0.09317074,  0.04877586,  0.27404251,  0.51549351,
         0.18365658,  0.99676429,  0.3342796 ,  0.90095057]])

In [13]:
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(16, input_dim=9, init='uniform',activation='linear'))
model.add(Dense(8, activation='linear'))
model.add(Dense(3, activation='linear'))
model.compile(loss='mean_squared_error',optimizer='rmsprop',metrics=['accuracy'])

N = 500
x_train = np.random.random((N,9))

y_train = np.array([np.linalg.eigvals(np.reshape(matrix,(3,3))) for matrix in x_train])

hist = model.fit(x_train,y_train, nb_epoch=100, batch_size=30,verbose=1,shuffle=True, validation_split=0.2)


Train on 400 samples, validate on 100 samples
Epoch 1/100
400/400 [==============================] - 0s - loss: 0.6229 - acc: 0.8225 - val_loss: 0.4024 - val_acc: 0.9700
Epoch 2/100
400/400 [==============================] - 0s - loss: 0.3232 - acc: 0.9225 - val_loss: 0.1907 - val_acc: 0.9700
Epoch 3/100
400/400 [==============================] - 0s - loss: 0.2041 - acc: 0.9225 - val_loss: 0.1251 - val_acc: 0.9700
Epoch 4/100
400/400 [==============================] - 0s - loss: 0.1800 - acc: 0.9225 - val_loss: 0.1198 - val_acc: 0.9700
Epoch 5/100
400/400 [==============================] - 0s - loss: 0.1754 - acc: 0.9225 - val_loss: 0.1090 - val_acc: 0.9700
Epoch 6/100
400/400 [==============================] - 0s - loss: 0.1722 - acc: 0.9225 - val_loss: 0.1119 - val_acc: 0.9700
Epoch 7/100
400/400 [==============================] - 0s - loss: 0.1705 - acc: 0.9225 - val_loss: 0.1069 - val_acc: 0.9700
Epoch 8/100
400/400 [==============================] - 0s - loss: 0.1656 - acc: 0.9225 - val_loss: 0.1101 - val_acc: 0.9700
Epoch 9/100
400/400 [==============================] - 0s - loss: 0.1639 - acc: 0.9225 - val_loss: 0.1059 - val_acc: 0.9700
Epoch 10/100
400/400 [==============================] - 0s - loss: 0.1608 - acc: 0.9225 - val_loss: 0.0944 - val_acc: 0.9700
Epoch 11/100
400/400 [==============================] - 0s - loss: 0.1584 - acc: 0.9225 - val_loss: 0.0933 - val_acc: 0.9700
Epoch 12/100
400/400 [==============================] - 0s - loss: 0.1566 - acc: 0.9225 - val_loss: 0.0900 - val_acc: 0.9700
Epoch 13/100
400/400 [==============================] - 0s - loss: 0.1543 - acc: 0.9225 - val_loss: 0.0926 - val_acc: 0.9700
Epoch 14/100
400/400 [==============================] - 0s - loss: 0.1513 - acc: 0.9225 - val_loss: 0.0890 - val_acc: 0.9700
Epoch 15/100
400/400 [==============================] - 0s - loss: 0.1496 - acc: 0.9225 - val_loss: 0.0907 - val_acc: 0.9700
Epoch 16/100
400/400 [==============================] - 0s - loss: 0.1475 - acc: 0.9225 - val_loss: 0.0811 - val_acc: 0.9700
Epoch 17/100
400/400 [==============================] - 0s - loss: 0.1447 - acc: 0.9225 - val_loss: 0.0799 - val_acc: 0.9700
Epoch 18/100
400/400 [==============================] - 0s - loss: 0.1430 - acc: 0.9225 - val_loss: 0.0878 - val_acc: 0.9700
Epoch 19/100
400/400 [==============================] - 0s - loss: 0.1411 - acc: 0.9225 - val_loss: 0.0764 - val_acc: 0.9700
Epoch 20/100
400/400 [==============================] - 0s - loss: 0.1397 - acc: 0.9225 - val_loss: 0.0860 - val_acc: 0.9700
Epoch 21/100
400/400 [==============================] - 0s - loss: 0.1385 - acc: 0.9225 - val_loss: 0.0839 - val_acc: 0.9700
Epoch 22/100
400/400 [==============================] - 0s - loss: 0.1370 - acc: 0.9225 - val_loss: 0.0771 - val_acc: 0.9700
Epoch 23/100
400/400 [==============================] - 0s - loss: 0.1354 - acc: 0.9225 - val_loss: 0.0774 - val_acc: 0.9700
Epoch 24/100
400/400 [==============================] - 0s - loss: 0.1340 - acc: 0.9225 - val_loss: 0.0682 - val_acc: 0.9700
Epoch 25/100
400/400 [==============================] - 0s - loss: 0.1335 - acc: 0.9225 - val_loss: 0.0691 - val_acc: 0.9700
Epoch 26/100
400/400 [==============================] - 0s - loss: 0.1312 - acc: 0.9225 - val_loss: 0.0662 - val_acc: 0.9700
Epoch 27/100
400/400 [==============================] - 0s - loss: 0.1308 - acc: 0.9225 - val_loss: 0.0654 - val_acc: 0.9700
Epoch 28/100
400/400 [==============================] - 0s - loss: 0.1295 - acc: 0.9225 - val_loss: 0.0628 - val_acc: 0.9700
Epoch 29/100
400/400 [==============================] - 0s - loss: 0.1289 - acc: 0.9225 - val_loss: 0.0767 - val_acc: 0.9700
Epoch 30/100
400/400 [==============================] - 0s - loss: 0.1287 - acc: 0.9225 - val_loss: 0.0616 - val_acc: 0.9700
Epoch 31/100
400/400 [==============================] - 0s - loss: 0.1279 - acc: 0.9225 - val_loss: 0.0610 - val_acc: 0.9700
Epoch 32/100
400/400 [==============================] - 0s - loss: 0.1278 - acc: 0.9225 - val_loss: 0.0621 - val_acc: 0.9700
Epoch 33/100
400/400 [==============================] - 0s - loss: 0.1276 - acc: 0.9225 - val_loss: 0.0667 - val_acc: 0.9700
Epoch 34/100
400/400 [==============================] - 0s - loss: 0.1266 - acc: 0.9225 - val_loss: 0.0675 - val_acc: 0.9700
Epoch 35/100
400/400 [==============================] - 0s - loss: 0.1272 - acc: 0.9225 - val_loss: 0.0630 - val_acc: 0.9700
Epoch 36/100
400/400 [==============================] - 0s - loss: 0.1271 - acc: 0.9225 - val_loss: 0.0665 - val_acc: 0.9700
Epoch 37/100
400/400 [==============================] - 0s - loss: 0.1255 - acc: 0.9225 - val_loss: 0.0767 - val_acc: 0.9700
Epoch 38/100
400/400 [==============================] - 0s - loss: 0.1270 - acc: 0.9225 - val_loss: 0.0606 - val_acc: 0.9700
Epoch 39/100
400/400 [==============================] - 0s - loss: 0.1267 - acc: 0.9225 - val_loss: 0.0600 - val_acc: 0.9700
Epoch 40/100
400/400 [==============================] - 0s - loss: 0.1261 - acc: 0.9225 - val_loss: 0.0601 - val_acc: 0.9700
Epoch 41/100
400/400 [==============================] - 0s - loss: 0.1264 - acc: 0.9225 - val_loss: 0.0621 - val_acc: 0.9700
Epoch 42/100
400/400 [==============================] - 0s - loss: 0.1261 - acc: 0.9225 - val_loss: 0.0606 - val_acc: 0.9700
Epoch 43/100
400/400 [==============================] - 0s - loss: 0.1256 - acc: 0.9225 - val_loss: 0.0612 - val_acc: 0.9700
Epoch 44/100
400/400 [==============================] - 0s - loss: 0.1259 - acc: 0.9225 - val_loss: 0.0653 - val_acc: 0.9700
Epoch 45/100
400/400 [==============================] - 0s - loss: 0.1257 - acc: 0.9225 - val_loss: 0.0606 - val_acc: 0.9700
Epoch 46/100
400/400 [==============================] - 0s - loss: 0.1259 - acc: 0.9225 - val_loss: 0.0592 - val_acc: 0.9700
Epoch 47/100
400/400 [==============================] - 0s - loss: 0.1252 - acc: 0.9225 - val_loss: 0.0663 - val_acc: 0.9700
Epoch 48/100
400/400 [==============================] - 0s - loss: 0.1252 - acc: 0.9225 - val_loss: 0.0656 - val_acc: 0.9700
Epoch 49/100
400/400 [==============================] - 0s - loss: 0.1255 - acc: 0.9225 - val_loss: 0.0587 - val_acc: 0.9700
Epoch 50/100
400/400 [==============================] - 0s - loss: 0.1259 - acc: 0.9225 - val_loss: 0.0588 - val_acc: 0.9700
Epoch 51/100
400/400 [==============================] - 0s - loss: 0.1257 - acc: 0.9225 - val_loss: 0.0600 - val_acc: 0.9700
Epoch 52/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0626 - val_acc: 0.9700
Epoch 53/100
400/400 [==============================] - 0s - loss: 0.1250 - acc: 0.9225 - val_loss: 0.0657 - val_acc: 0.9700
Epoch 54/100
400/400 [==============================] - 0s - loss: 0.1257 - acc: 0.9225 - val_loss: 0.0629 - val_acc: 0.9700
Epoch 55/100
400/400 [==============================] - 0s - loss: 0.1256 - acc: 0.9225 - val_loss: 0.0600 - val_acc: 0.9700
Epoch 56/100
400/400 [==============================] - 0s - loss: 0.1252 - acc: 0.9225 - val_loss: 0.0609 - val_acc: 0.9700
Epoch 57/100
400/400 [==============================] - 0s - loss: 0.1261 - acc: 0.9225 - val_loss: 0.0628 - val_acc: 0.9700
Epoch 58/100
400/400 [==============================] - 0s - loss: 0.1245 - acc: 0.9225 - val_loss: 0.0594 - val_acc: 0.9700
Epoch 59/100
400/400 [==============================] - 0s - loss: 0.1250 - acc: 0.9225 - val_loss: 0.0600 - val_acc: 0.9700
Epoch 60/100
400/400 [==============================] - 0s - loss: 0.1248 - acc: 0.9225 - val_loss: 0.0665 - val_acc: 0.9700
Epoch 61/100
400/400 [==============================] - 0s - loss: 0.1251 - acc: 0.9225 - val_loss: 0.0614 - val_acc: 0.9700
Epoch 62/100
400/400 [==============================] - 0s - loss: 0.1253 - acc: 0.9225 - val_loss: 0.0852 - val_acc: 0.9700
Epoch 63/100
400/400 [==============================] - 0s - loss: 0.1259 - acc: 0.9225 - val_loss: 0.0599 - val_acc: 0.9700
Epoch 64/100
400/400 [==============================] - 0s - loss: 0.1249 - acc: 0.9225 - val_loss: 0.0698 - val_acc: 0.9700
Epoch 65/100
400/400 [==============================] - 0s - loss: 0.1238 - acc: 0.9225 - val_loss: 0.0769 - val_acc: 0.9700
Epoch 66/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0713 - val_acc: 0.9700
Epoch 67/100
400/400 [==============================] - 0s - loss: 0.1251 - acc: 0.9225 - val_loss: 0.0683 - val_acc: 0.9700
Epoch 68/100
400/400 [==============================] - 0s - loss: 0.1246 - acc: 0.9225 - val_loss: 0.0767 - val_acc: 0.9700
Epoch 69/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0633 - val_acc: 0.9700
Epoch 70/100
400/400 [==============================] - 0s - loss: 0.1248 - acc: 0.9225 - val_loss: 0.0635 - val_acc: 0.9700
Epoch 71/100
400/400 [==============================] - 0s - loss: 0.1249 - acc: 0.9225 - val_loss: 0.0601 - val_acc: 0.9700
Epoch 72/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0754 - val_acc: 0.9700
Epoch 73/100
400/400 [==============================] - 0s - loss: 0.1247 - acc: 0.9225 - val_loss: 0.0628 - val_acc: 0.9700
Epoch 74/100
400/400 [==============================] - 0s - loss: 0.1253 - acc: 0.9225 - val_loss: 0.0651 - val_acc: 0.9700
Epoch 75/100
400/400 [==============================] - 0s - loss: 0.1250 - acc: 0.9225 - val_loss: 0.0633 - val_acc: 0.9700
Epoch 76/100
400/400 [==============================] - 0s - loss: 0.1241 - acc: 0.9225 - val_loss: 0.0617 - val_acc: 0.9700
Epoch 77/100
400/400 [==============================] - 0s - loss: 0.1244 - acc: 0.9225 - val_loss: 0.0708 - val_acc: 0.9700
Epoch 78/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0658 - val_acc: 0.9700
Epoch 79/100
400/400 [==============================] - 0s - loss: 0.1243 - acc: 0.9225 - val_loss: 0.0612 - val_acc: 0.9700
Epoch 80/100
400/400 [==============================] - 0s - loss: 0.1247 - acc: 0.9225 - val_loss: 0.0630 - val_acc: 0.9700
Epoch 81/100
400/400 [==============================] - 0s - loss: 0.1251 - acc: 0.9225 - val_loss: 0.0614 - val_acc: 0.9700
Epoch 82/100
400/400 [==============================] - 0s - loss: 0.1254 - acc: 0.9225 - val_loss: 0.0635 - val_acc: 0.9700
Epoch 83/100
400/400 [==============================] - 0s - loss: 0.1246 - acc: 0.9225 - val_loss: 0.0609 - val_acc: 0.9700
Epoch 84/100
400/400 [==============================] - 0s - loss: 0.1247 - acc: 0.9225 - val_loss: 0.0679 - val_acc: 0.9700
Epoch 85/100
400/400 [==============================] - 0s - loss: 0.1248 - acc: 0.9225 - val_loss: 0.0712 - val_acc: 0.9700
Epoch 86/100
400/400 [==============================] - 0s - loss: 0.1243 - acc: 0.9225 - val_loss: 0.0650 - val_acc: 0.9700
Epoch 87/100
400/400 [==============================] - 0s - loss: 0.1246 - acc: 0.9225 - val_loss: 0.0662 - val_acc: 0.9700
Epoch 88/100
400/400 [==============================] - 0s - loss: 0.1244 - acc: 0.9225 - val_loss: 0.0613 - val_acc: 0.9700
Epoch 89/100
400/400 [==============================] - 0s - loss: 0.1240 - acc: 0.9225 - val_loss: 0.0676 - val_acc: 0.9700
Epoch 90/100
400/400 [==============================] - 0s - loss: 0.1233 - acc: 0.9225 - val_loss: 0.0855 - val_acc: 0.9600
Epoch 91/100
400/400 [==============================] - 0s - loss: 0.1256 - acc: 0.9225 - val_loss: 0.0629 - val_acc: 0.9700
Epoch 92/100
400/400 [==============================] - 0s - loss: 0.1251 - acc: 0.9225 - val_loss: 0.0686 - val_acc: 0.9700
Epoch 93/100
400/400 [==============================] - 0s - loss: 0.1252 - acc: 0.9225 - val_loss: 0.0630 - val_acc: 0.9700
Epoch 94/100
400/400 [==============================] - 0s - loss: 0.1243 - acc: 0.9225 - val_loss: 0.0691 - val_acc: 0.9700
Epoch 95/100
400/400 [==============================] - 0s - loss: 0.1249 - acc: 0.9225 - val_loss: 0.0668 - val_acc: 0.9700
Epoch 96/100
400/400 [==============================] - 0s - loss: 0.1253 - acc: 0.9225 - val_loss: 0.0714 - val_acc: 0.9700
Epoch 97/100
400/400 [==============================] - 0s - loss: 0.1245 - acc: 0.9225 - val_loss: 0.0618 - val_acc: 0.9700
Epoch 98/100
400/400 [==============================] - 0s - loss: 0.1243 - acc: 0.9225 - val_loss: 0.0609 - val_acc: 0.9700
Epoch 99/100
400/400 [==============================] - 0s - loss: 0.1246 - acc: 0.9225 - val_loss: 0.0608 - val_acc: 0.9700
Epoch 100/100
400/400 [==============================] - 0s - loss: 0.1241 - acc: 0.9225 - val_loss: 0.0632 - val_acc: 0.9700

In [16]:
x_val = np.random.random((100,9))
y_val = np.array([np.linalg.eigvals(np.reshape(matrix,(3,3))) for matrix in x_val])

out = model.predict(x_val)

errors= np.mean(np.abs(y_val-out),axis=1)

%matplotlib inline
import matplotlib.pyplot as plt
#plt.scatter(range(len(errors)),errors,c='r',label='Accurate')
#plt.legend(loc=1)
plt.hist(errors)
plt.show()


Similar Matrix


In [ ]:
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(16, input_dim=9, init='uniform',activation='linear'))
model.add(Dense(8, activation='linear'))
model.add(Dense(3, activation='linear'))
model.compile(loss='mean_squared_error',optimizer='rmsprop',metrics=['accuracy'])

N = 500
x_train = np.random.random((N,9))

y_train = np.array([np.linalg.eigvals(np.reshape(matrix,(3,3))) for matrix in x_train])

hist = model.fit(x_train,y_train, nb_epoch=100, batch_size=30,verbose=1,shuffle=True, validation_split=0.2)